Let’s Build a Chatbot

Using GitHub Copilot, ChatGPT, & more in your favorite IDE

James Wade

Creating OpenAI API Calls

An Example from OpenAI Documentation

curl https://api.openai.com/v1/chat/completions \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $OPENAI_API_KEY" \
-d '{
"model": "gpt-3.5-turbo",
"messages": [{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "Hello!"}]
}'

Constructing Messages for OpenAI

The message body:

{
  "model": "gpt-3.5-turbo",
  "messages": [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"}
  ]
}

Send requests with {httr2}

library(httr2)
library(purrr)

Send requests with {httr2}

library(httr2)
library(purrr)

# construct the message body: OpenAI expects `messages` as an array of
# {role, content} objects ("Hello!" matches the curl example's greeting)
user_message <- list(list(role = "user", content = "Hello!"))
body <- list(model = "gpt-3.5-turbo", messages = user_message)
# read the API key from the environment; never hard-code secrets
api_key <- Sys.getenv("OPENAI_API_KEY")

Send requests with {httr2}

library(httr2)
library(purrr)

# construct the message body
user_message <- list(list(role = "user", content = "Hello!"))
body <- list(model = "gpt-3.5-turbo", messages = user_message)
api_key <- Sys.getenv("OPENAI_API_KEY")

# send the request
resp <-
  request("https://api.openai.com/v1") |> 
  req_url_path_append("chat/completions") |> 
  req_auth_bearer_token(token = api_key) |> 
  req_body_json(body) |> 
  req_perform()

Send requests with {httr2}

# construct the message body
user_message <- list(list(role = "user", content = "Hello!"))
body <- list(model = "gpt-3.5-turbo", messages = user_message)
api_key <- Sys.getenv("OPENAI_API_KEY")

# send the request
resp <-
  request("https://api.openai.com/v1") |>
  req_url_path_append("chat/completions") |> 
  req_auth_bearer_token(token = api_key) |> 
  req_body_json(body) |> 
  req_perform()

# process the response: parse the JSON body, then drill down to the
# assistant's reply text at choices -> message -> content
resp |>
  resp_body_json(simplifyVector = TRUE) |> 
  pluck("choices", "message", "content")
[1] "Hello! How can I assist you today?"

Examining the Response

resp |> 
  resp_body_json(simplifyVector = TRUE)
$id
[1] "chatcmpl-8qqJowQNNZnsgNK3F7VfG0nQKrF1y"

$object
[1] "chat.completion"

$created
[1] 1707604860

$model
[1] "gpt-3.5-turbo-0613"

$choices
  index message.role                    message.content logprobs finish_reason
1     0    assistant Hello! How can I assist you today?       NA          stop

$usage
$usage$prompt_tokens
[1] 9

$usage$completion_tokens
[1] 9

$usage$total_tokens
[1] 18


$system_fingerprint
NULL

Wrapping it in a function

library(httr2)
library(purrr)

# Send a single user message to the OpenAI chat completions API and
# return the assistant's reply as a character string.
#
# message: the user's message (character scalar).
# api_key: OpenAI API key; defaults to the OPENAI_API_KEY env var.
# model:   chat model to request; generalizes the previously hard-coded
#          "gpt-3.5-turbo" (appended after api_key so existing
#          positional calls keep working).
chat <- function(message,
                 api_key = Sys.getenv("OPENAI_API_KEY"),
                 model = "gpt-3.5-turbo") {
  # OpenAI expects `messages` as an array of {role, content} objects
  user_message <- list(list(role = "user", content = message))
  body <- list(model = model,
               messages = user_message)

  # build and perform the HTTP request with httr2
  resp <-
    request("https://api.openai.com/v1") |> 
    req_url_path_append("chat/completions") |> 
    req_auth_bearer_token(token = api_key) |> 
    req_body_json(body) |> 
    req_perform()

  # parse the JSON body and extract the assistant's reply text
  resp |> 
    resp_body_json(simplifyVector = TRUE) |> 
    pluck("choices", "message", "content")
}

Trying out chat()

chat("What is your favorite color?")
[1] "As an artificial intelligence, I do not have personal preferences or the ability to perceive colors."

chat("Show me a simple ggplot2 example. Only code with comments. Be brief.")
[1] "Sure! Here is a simple example using ggplot2 to plot the distribution of a continuous variable:\n\n```R\nlibrary(ggplot2)\n\n# Create a dataframe with a continuous variable\ndata <- data.frame(\n  x = rnorm(100)\n)\n\n# Plot a histogram of the variable\nggplot(data, aes(x)) +\n  geom_histogram()\n```\n\nThis code first loads the ggplot2 library. Then, a dataframe `data` is created with a single continuous variable called `x`, which is generated by sampling from a normal distribution. Finally, the `ggplot()` function is used to create a plot, specifying the `data` dataframe and the variable `x` to be plotted as a histogram using the `geom_histogram()` function."

A Prettier Response

answer <- chat("Make a ggplot2 in an RMarkdown document and briefly tell me
               what you made.")
answer |> cat()
```{r, fig.width=5, fig.height=4}
library(ggplot2)

# Create a dataset
data <- data.frame(
  x = c(1, 2, 3, 4, 5),
  y = c(1, 4, 9, 16, 25)
)

# Create a scatter plot
ggplot(data, aes(x, y)) +
  geom_point() +
  labs(
    title = "Scatter Plot",
    x = "X",
    y = "Y"
  )
```

In the above code, I created a scatter plot using `ggplot2` in an RMarkdown document. The dataset consists of five data points with x-coordinates being the numbers 1 to 5 and y-coordinates being the squared values of their respective x-coordinates. The scatter plot displays these data points as individual points on the graph. The title of the plot is "Scatter Plot," with the x-axis labeled as "X" and the y-axis labeled as "Y."

An Even Prettier Response

answer |> shiny::markdown()
library(ggplot2)

# Create a dataset
data <- data.frame(
  x = c(1, 2, 3, 4, 5),
  y = c(1, 4, 9, 16, 25)
)

# Create a scatter plot
ggplot(data, aes(x, y)) +
  geom_point() +
  labs(
    title = "Scatter Plot",
    x = "X",
    y = "Y"
  )

In the above code, I created a scatter plot using ggplot2 in an RMarkdown document. The dataset consists of five data points with x-coordinates being the numbers 1 to 5 and y-coordinates being the squared values of their respective x-coordinates. The scatter plot displays these data points as individual points on the graph. The title of the plot is "Scatter Plot," with the x-axis labeled as "X" and the y-axis labeled as "Y."

Some Helper Functions

chat()

# Chat entry point: resolve the system prompt, assemble the full
# messages array (system + history + new user turn), and build the
# request body. The request pipeline itself is filled in on a later
# slide.
#
# user_message:  the new user turn (character scalar).
# history:       prior messages as a list of {role, content} lists,
#                or NULL for a fresh conversation.
# system_prompt: which built-in system prompt to use ("general" or
#                "code"); resolved by get_system_prompt().
# api_key:       OpenAI API key; defaults to the OPENAI_API_KEY env var.
chat <- function(user_message, 
                 history = NULL,
                 system_prompt = c("general", "code"),
                 api_key = Sys.getenv("OPENAI_API_KEY")) {
  system   <- get_system_prompt(system_prompt)
  # pass the constructed system message list (`system`), not the raw
  # `system_prompt` choice vector, so the API receives a proper
  # {role: "system", ...} entry (the original passed `system_prompt`
  # and left `system` unused)
  prompt   <- prepare_prompt(user_message, system, history)
  base_url <- "https://api.openai.com/v1"
  body     <- list(model = "gpt-3.5-turbo",
                   messages = prompt)
  
  # <httr2_request_pipeline>
  # <process_response>
}

Helper Functions

get_system_prompt()

# Build the system message for a chat request.
#
# system: which built-in prompt to use; one of "general" or "code".
#         match.arg() validates the choice and selects "general" when
#         the default vector is passed through unchanged — without it,
#         switch() errors on the length-2 default c("general", "code").
#
# Returns a one-element list shaped like OpenAI's messages array:
# list(list(role = "system", content = <instructions>)).
get_system_prompt <- function(system = c("general", "code")) {
  system <- match.arg(system)
  instructions <- 
    switch(system,
           "general" = "You are a helpful assistant.",
           "code"    = "<code_assistant_prompt>")
  list(list(role = "system", content = instructions))
}


prepare_prompt()

# Assemble the full messages array for a chat request: the system
# message first, then any prior conversation history, then the new
# user turn. compact() drops empty parts (e.g. a NULL history).
prepare_prompt <- function(user_message, system_prompt, history) {
  new_turn <- list(list(role = "user", content = user_message))
  messages <- c(system_prompt, history, new_turn)
  compact(messages)
}